use cargo::core::Workspace;
use cargo::ops::{self, MessageFormat, Packages};
-use cargo::util::{CliResult, CliError, Config, human, CargoErrorKind};
+use cargo::util::{CliResult, CliError, Config, CargoErrorKind};
use cargo::util::important_paths::{find_root_manifest_for_wd};
#[derive(RustcDecodable)]
None => Ok(()),
Some(err) => {
Err(match err.exit.as_ref().and_then(|e| e.code()) {
- Some(i) => CliError::new(human("bench failed"), i),
+ Some(i) => CliError::new("bench failed".into(), i),
None => CliError::new(CargoErrorKind::CargoTestErrorKind(err).into(), 101)
})
}
use std::path::{Path, PathBuf};
use cargo::core::shell::{Verbosity, ColorConfig};
-use cargo::util::{self, CliResult, lev_distance, Config, human, CargoResult, CargoError, CargoErrorKind};
+use cargo::util::{self, CliResult, lev_distance, Config, CargoResult, CargoError, CargoErrorKind};
use cargo::util::CliError;
#[derive(RustcDecodable)]
let result = (|| {
let args: Vec<_> = try!(env::args_os()
.map(|s| {
- s.into_string().map_err(|s| human(format!("invalid unicode in argument: {:?}", s)))
+ s.into_string().map_err(|s| CargoError::from(format!("invalid unicode in argument: {:?}", s)))
})
.collect());
let rest = &args;
if let Some(ref code) = flags.flag_explain {
let mut procss = config.rustc()?.process();
- procss.arg("--explain").arg(code).exec().map_err(human)?;
+ procss.arg("--explain").arg(code).exec()?;
return Ok(());
}
let command = match path {
Some(command) => command,
None => {
- return Err(human(match find_closest(config, cmd) {
+ return Err(CargoError::from(match find_closest(config, cmd) {
Some(closest) => {
format!("no such subcommand: `{}`\n\n\tDid you mean `{}`?\n",
cmd,
-use cargo::util::{CliResult, CliError, Config, human};
+use cargo::util::{CliResult, CliError, Config};
#[derive(RustcDecodable)]
pub struct Options;
// This is a dummy command just so that `cargo help help` works.
// The actual delegation of help flag to subcommands is handled by the
// cargo command.
- Err(CliError::new(human("help command should not be executed directly"), 101))
+ Err(CliError::new("help command should not be executed directly".into(), 101))
}
use cargo;
-use cargo::util::{CliResult, CliError, human, Config};
+use cargo::util::{CliResult, CliError, Config};
use cargo::util::important_paths::{find_root_manifest_for_wd};
#[derive(RustcDecodable)]
let root = find_root_manifest_for_wd(flags.flag_manifest_path, config.cwd())?;
let string = root.to_str()
- .ok_or_else(|| human("Your project path contains \
+ .ok_or_else(|| "Your project path contains \
characters not representable in \
- Unicode"))
+ Unicode".into())
.map_err(|e| CliError::new(e, 1))?;
let location = ProjectLocation { root: string.to_string() };
use cargo::ops;
use cargo::core::{SourceId, Source};
use cargo::sources::RegistrySource;
-use cargo::util::{CliResult, CargoResultExt, Config, human};
+use cargo::util::{CliResult, CargoResultExt, Config};
#[derive(RustcDecodable)]
pub struct Options {
let mut line = String::new();
let input = io::stdin();
input.lock().read_line(&mut line).chain_err(|| {
- human("failed to read stdin")
+ "failed to read stdin"
})?;
line
}
use cargo::core::Workspace;
use cargo::ops::{self, CompileOptions, CompileMode, MessageFormat, Packages};
use cargo::util::important_paths::{find_root_manifest_for_wd};
-use cargo::util::{CliResult, CliError, Config, human};
+use cargo::util::{CliResult, CliError, Config};
#[derive(RustcDecodable)]
pub struct Options {
Some("bench") => CompileMode::Bench,
Some("check") => CompileMode::Check,
Some(mode) => {
- let err = human(format!("unknown profile: `{}`, use dev,
- test, or bench", mode));
+ let err = format!("unknown profile: `{}`, use dev,
+ test, or bench", mode).into();
return Err(CliError::new(err, 101))
}
};
use cargo::core::Workspace;
use cargo::ops::{self, MessageFormat, Packages};
-use cargo::util::{CliResult, CliError, human, Config, CargoErrorKind};
+use cargo::util::{CliResult, CliError, Config, CargoErrorKind};
use cargo::util::important_paths::find_root_manifest_for_wd;
#[derive(RustcDecodable)]
None => Ok(()),
Some(err) => {
Err(match err.exit.as_ref().and_then(|e| e.code()) {
- Some(i) => CliError::new(human(err.hint()), i),
+ Some(i) => CliError::new(err.hint().into(), i),
None => CliError::new(CargoErrorKind::CargoTestErrorKind(err).into(), 101),
})
}
use serde::ser;
use core::{SourceId, Summary, PackageId};
-use util::{Cfg, CfgExpr, human, Config};
+use util::{Cfg, CfgExpr, Config};
use util::errors::{CargoResult, CargoResultExt, CargoError};
/// Information about a dependency requested by a Cargo manifest.
if s.starts_with("cfg(") && s.ends_with(")") {
let s = &s[4..s.len()-1];
s.parse().map(Platform::Cfg).chain_err(|| {
- human(format!("failed to parse `{}` as a cfg expression", s))
+ format!("failed to parse `{}` as a cfg expression", s)
})
} else {
Ok(Platform::Name(s.to_string()))
use core::{Dependency, Manifest, PackageId, SourceId, Target};
use core::{Summary, SourceMap};
use ops;
-use util::{Config, LazyCell, internal, human, lev_distance};
+use util::{Config, LazyCell, internal, lev_distance};
use util::errors::{CargoResult, CargoResultExt};
/// Information about a package that is available somewhere in the file system.
internal(format!("couldn't find source for `{}`", id))
})?;
let pkg = source.download(id).chain_err(|| {
- human("unable to get packages from source")
+ "unable to get packages from source"
})?;
assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap())
use url::Url;
use core::PackageId;
-use util::{ToUrl, human, ToSemver};
-use util::errors::{CargoResult, CargoResultExt};
+use util::{ToUrl, ToSemver};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct PackageIdSpec {
let mut parts = spec.splitn(2, ':');
let name = parts.next().unwrap();
let version = match parts.next() {
- Some(version) => Some(Version::parse(version).map_err(human)?),
+ Some(version) => Some(Version::parse(version)?),
None => None,
};
for ch in name.chars() {
where I: IntoIterator<Item=&'a PackageId>
{
let spec = PackageIdSpec::parse(spec).chain_err(|| {
- human(format!("invalid package id specification: `{}`", spec))
+ format!("invalid package id specification: `{}`", spec)
})?;
spec.query(i)
}
url.set_fragment(None);
let (name, version) = {
let mut path = url.path_segments().ok_or_else(|| {
- human(format!("pkgid urls must have a path: {}", url))
+ CargoError::from(format!("pkgid urls must have a path: {}", url))
})?;
let path_name = path.next_back().ok_or_else(|| {
- human(format!("pkgid urls must have at least one path \
- component: {}", url))
+ CargoError::from(format!("pkgid urls must have at least one path \
+ component: {}", url))
})?;
match frag {
Some(fragment) => {
let name_or_version = parts.next().unwrap();
match parts.next() {
Some(part) => {
- let version = part.to_semver().map_err(human)?;
+ let version = part.to_semver()?;
(name_or_version.to_string(), Some(version))
}
None => {
.is_alphabetic() {
(name_or_version.to_string(), None)
} else {
- let version = name_or_version.to_semver()
- .map_err(human)?;
+ let version = name_or_version.to_semver()?;
(path_name.to_string(), Some(version))
}
}
let mut vec = vec![ret, other];
vec.extend(ids);
minimize(&mut msg, vec, self);
- Err(human(msg))
+ Err(msg.into())
}
None => Ok(ret)
};
use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId, Package};
use core::PackageSet;
-use util::{Config, human, profile};
+use util::{Config, profile};
use util::errors::{CargoResult, CargoResultExt};
use sources::config::SourceConfigMap;
// Ensure the source has fetched all necessary remote data.
let _p = profile::start(format!("updating: {}", source_id));
self.sources.get_mut(source_id).unwrap().update()
- })().chain_err(|| human(format!("Unable to update {}", source_id)))
+ })().chain_err(|| format!("Unable to update {}", source_id))
}
fn query_overrides(&mut self, dep: &Dependency)
fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
// Ensure the requested source_id is loaded
self.ensure_loaded(dep.source_id(), Kind::Normal).chain_err(|| {
- human(format!("failed to load source for a dependency \
- on `{}`", dep.name()))
+ format!("failed to load source for a dependency \
+ on `{}`", dep.name())
})?;
let override_summary = self.query_overrides(&dep)?;
use core::{PackageId, Registry, SourceId, Summary, Dependency};
use core::PackageIdSpec;
-use util::{Graph, human};
+use util::Graph;
use util::errors::{CargoResult, CargoError};
use util::profile;
use util::graph::{Nodes, Edges};
.collect::<Vec<_>>()
.join(", ")));
- return human(msg)
+ return msg.into()
}
// Once we're all the way down here, we're definitely lost in the
dep.version_req())
};
- human(msg)
+ msg.into()
}
// Returns if `a` and `b` are compatible in the semver sense. This is a
let mut summaries = registry.query(dep)?.into_iter();
let s = summaries.next().ok_or_else(|| {
- human(format!("no matching package for override `{}` found\n\
- location searched: {}\n\
- version required: {}",
- spec, dep.source_id(), dep.version_req()))
+ format!("no matching package for override `{}` found\n\
+ location searched: {}\n\
+ version required: {}",
+ spec, dep.source_id(), dep.version_req())
})?;
let summaries = summaries.collect::<Vec<_>>();
if summaries.len() > 0 {
use sources::git;
use sources::{PathSource, GitSource, RegistrySource, CRATES_IO};
use sources::DirectorySource;
-use util::{human, Config, CargoResult, ToUrl};
+use util::{Config, CargoResult, ToUrl};
/// A Source finds and downloads remote packages based on names and
/// versions.
pub fn from_url(string: &str) -> CargoResult<SourceId> {
let mut parts = string.splitn(2, '+');
let kind = parts.next().unwrap();
- let url = parts.next().ok_or(human(format!("invalid source `{}`", string)))?;
+ let url = parts.next().ok_or_else(|| format!("invalid source `{}`", string))?;
match kind {
"git" => {
let url = url.to_url()?;
Ok(SourceId::new(Kind::Path, url))
}
- kind => Err(human(format!("unsupported source protocol: {}", kind)))
+ kind => Err(format!("unsupported source protocol: {}", kind).into())
}
}
use core::{Package, VirtualManifest, EitherManifest, SourceId};
use core::{PackageIdSpec, Dependency, Profile, Profiles};
use ops;
-use util::{Config, Filesystem, human};
+use util::{Config, Filesystem};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
/// indicating that something else should be passed.
pub fn current(&self) -> CargoResult<&Package> {
self.current_opt().ok_or_else(||
- human(format!("manifest path `{}` is a virtual manifest, but this \
- command requires running against an actual package in \
- this workspace", self.current_manifest.display()))
+ format!("manifest path `{}` is a virtual manifest, but this \
+ command requires running against an actual package in \
+ this workspace", self.current_manifest.display()).into()
)
}
None => return Ok(Vec::new()),
};
let res = glob(path).chain_err(|| {
- human(format!("could not parse pattern `{}`", &path))
+ format!("could not parse pattern `{}`", &path)
})?;
res.map(|p| {
p.chain_err(|| {
- human(format!("unable to match path to pattern `{}`", &path))
+ format!("unable to match path to pattern `{}`", &path)
})
}).collect()
}
use core::shell::Verbosity::{Verbose};
use term::color::{BLACK};
-pub use util::{CargoError, CargoErrorKind, CargoResult, CliError, CliResult, human, Config};
+pub use util::{CargoError, CargoErrorKind, CargoResult, CliError, CliResult, Config};
pub const CARGO_ENV: &'static str = "CARGO";
macro_rules! bail {
($($fmt:tt)*) => (
- return Err(::util::human(&format_args!($($fmt)*)))
+ return Err(::util::errors::CargoError::from(format_args!($($fmt)*).to_string()))
)
}
let flags = docopt.decode().map_err(|e| {
let code = if e.fatal() {1} else {0};
- CliError::new(human(e.to_string()), code)
+ CliError::new(e.to_string().into(), code)
})?;
exec(flags, config)
let _ = shell.err().say(format!(" {}", error), BLACK);
}
- unsafe fn extend_lifetime(r: &Error) -> &'static Error {
- std::mem::transmute::<&Error, &'static Error>(r)
+ // Error inspection in non-verbose mode requires inspecting the
+ // error kind to avoid printing internal errors. The downcasting
+ // machinery requires `&(Error + 'static)`, but the iterator (and
+ // the underlying `cause`) return `&Error`. Because the borrows are
+ // confined to this handling method, and because the original
+ // error object is constrained to be `'static`, we cast away the
+ // borrow's actual lifetime for the purposes of downcasting and
+ // inspecting the error chain.
+ unsafe fn extend_lifetime(r: &Error) -> &(Error + 'static) {
+ std::mem::transmute::<&Error, &Error>(r)
}
let verbose = shell.get_verbose();
for err in cargo_err.iter().skip(1) {
print(err.to_string(), shell);
}
- }
- else {
+ } else {
//The first error has already been printed to the shell
//Print remaining errors until one marked as Internal appears
for err in cargo_err.iter().skip(1) {
use std::path::Path;
use core::{Profiles, Workspace};
-use util::{human, Config};
+use util::Config;
use util::errors::{CargoResult, CargoResultExt};
use ops::{self, Context, BuildConfig, Kind, Unit};
let m = fs::metadata(path);
if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
fs::remove_dir_all(path).chain_err(|| {
- human("could not remove build directory")
+ "could not remove build directory"
})?;
} else if m.is_ok() {
fs::remove_file(path).chain_err(|| {
- human("failed to remove build artifact")
+ "failed to remove build artifact"
})?;
}
Ok(())
use core::{PackageId, Workspace};
use ops::{self, CompileFilter, DefaultExecutor};
use sources::{GitSource, PathSource, SourceConfigMap};
-use util::{Config, human, internal};
+use util::{Config, internal};
use util::{Filesystem, FileLock};
-use util::errors::{CargoResult, CargoResultExt};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
#[derive(Deserialize, Serialize)]
#[serde(untagged)]
.expect("path sources must have a valid path");
let mut src = PathSource::new(&path, source_id, config);
src.update().chain_err(|| {
- human(format!("`{}` is not a crate root; specify a crate to \
- install from crates.io, or use --path or --git to \
- specify an alternate source", path.display()))
+ format!("`{}` is not a crate root; specify a crate to \
+ install from crates.io, or use --path or --git to \
+ specify an alternate source", path.display())
})?;
select_pkg(PathSource::new(&path, source_id, config),
krate, vers, config, &mut |path| path.read_packages())?
} else {
select_pkg(map.load(source_id)?,
krate, vers, config,
- &mut |_| Err(human("must specify a crate to install from \
- crates.io, or use --path or --git to \
- specify alternate source")))?
+ &mut |_| Err("must specify a crate to install from \
+ crates.io, or use --path or --git to \
+ specify alternate source".into()))?
};
td.into_path();
}
- human(format!("failed to compile `{}`, intermediate artifacts can be \
- found at `{}`", pkg, ws.target_dir().display()))
+ CargoError::from(format!("failed to compile `{}`, intermediate artifacts can be \
+ found at `{}`", pkg, ws.target_dir().display()))
})?;
let binaries: Vec<(&str, &Path)> = compile.binaries.iter().map(|bin| {
let name = bin.file_name().unwrap();
}
}
fs::copy(src, &dst).chain_err(|| {
- human(format!("failed to copy `{}` to `{}`", src.display(),
- dst.display()))
+ format!("failed to copy `{}` to `{}`", src.display(),
+ dst.display())
})?;
}
let dst = dst.join(bin);
config.shell().status("Installing", dst.display())?;
fs::rename(&src, &dst).chain_err(|| {
- human(format!("failed to move `{}` to `{}`", src.display(),
- dst.display()))
+ format!("failed to move `{}` to `{}`", src.display(),
+ dst.display())
})?;
installed.bins.push(dst);
}
let dst = dst.join(bin);
config.shell().status("Replacing", dst.display())?;
fs::rename(&src, &dst).chain_err(|| {
- human(format!("failed to move `{}` to `{}`", src.display(),
- dst.display()))
+ format!("failed to move `{}` to `{}`", src.display(),
+ dst.display())
})?;
replaced_names.push(bin);
}
None => {
let vers_info = vers.map(|v| format!(" with version `{}`", v))
.unwrap_or(String::new());
- Err(human(format!("could not find `{}` in `{}`{}", name,
- source.source_id(), vers_info)))
+ Err(format!("could not find `{}` in `{}`{}", name,
+ source.source_id(), vers_info).into())
}
}
}
(Some(i1), Some(i2)) => {
let mut v = vec![i1, i2];
v.extend(i);
- Err(human(f(v)))
+ Err(f(v).into())
}
(Some(i), None) => Ok(Some(i)),
(None, _) => Ok(None)
}
}
msg.push_str("Add --force to overwrite");
- Err(human(msg))
+ Err(msg.into())
}
fn find_duplicates(dst: &Path,
}
}
})().chain_err(|| {
- human("failed to parse crate metadata")
+ "failed to parse crate metadata"
})
}
file.write_all(data.as_bytes())?;
Ok(())
})().chain_err(|| {
- human("failed to write crate metadata")
+ "failed to write crate metadata"
})
}
use core::Workspace;
use ops::is_bad_artifact_name;
-use util::{GitRepo, HgRepo, PijulRepo, human, internal};
+use util::{GitRepo, HgRepo, PijulRepo, internal};
use util::{Config, paths};
-use util::errors::{CargoResult, CargoResultExt};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
use toml;
}
let dir_name = path.file_name().and_then(|s| s.to_str()).ok_or_else(|| {
- human(&format!("cannot create a project with a non-unicode name: {:?}",
- path.file_name().unwrap()))
+ CargoError::from(format!("cannot create a project with a non-unicode name: {:?}",
+ path.file_name().unwrap()))
})?;
if opts.bin {
duplicates_checker.insert(i.target_name.as_ref(), i);
} else {
if let Some(plp) = previous_lib_relpath {
- return Err(human(format!("cannot have a project with \
- multiple libraries, \
- found both `{}` and `{}`",
- plp, i.relative_path)));
+ return Err(format!("cannot have a project with \
+ multiple libraries, \
+ found both `{}` and `{}`",
+ plp, i.relative_path).into());
}
previous_lib_relpath = Some(&i.relative_path);
}
};
mk(config, &mkopts).chain_err(|| {
- human(format!("Failed to create project `{}` at `{}`",
- name, path.display()))
+ format!("Failed to create project `{}` at `{}`",
+ name, path.display())
})
}
};
mk(config, &mkopts).chain_err(|| {
- human(format!("Failed to create project `{}` at `{}`",
- name, path.display()))
+ format!("Failed to create project `{}` at `{}`",
+ name, path.display())
})
}
use core::{Package, Workspace, Source, SourceId};
use sources::PathSource;
-use util::{self, human, internal, Config, FileLock};
+use util::{self, internal, Config, FileLock};
use util::errors::{CargoResult, CargoResultExt};
use ops::{self, DefaultExecutor};
config.shell().status("Packaging", pkg.package_id().to_string())?;
dst.file().set_len(0)?;
tar(ws, &src, dst.file(), &filename).chain_err(|| {
- human("failed to prepare local package for uploading")
+ "failed to prepare local package for uploading"
})?;
if opts.verify {
dst.seek(SeekFrom::Start(0))?;
run_verify(ws, dst.file(), opts).chain_err(|| {
- human("failed to verify package tarball")
+ "failed to verify package tarball"
})?
}
dst.seek(SeekFrom::Start(0))?;
let src_path = dst.path();
let dst_path = dst.parent().join(&filename);
fs::rename(&src_path, &dst_path).chain_err(|| {
- human("failed to move temporary tarball into final location")
+ "failed to move temporary tarball into final location"
})?;
}
Ok(Some(dst))
let relative = util::without_prefix(&file, &root).unwrap();
check_filename(relative)?;
let relative = relative.to_str().ok_or_else(|| {
- human(format!("non-utf8 path in source directory: {}",
- relative.display()))
+ format!("non-utf8 path in source directory: {}",
+ relative.display())
})?;
config.shell().verbose(|shell| {
shell.status("Archiving", &relative)
// look at rust-lang/cargo#2326
let mut header = Header::new_ustar();
header.set_path(&path).chain_err(|| {
- human(format!("failed to add to archive: `{}`", relative))
+ format!("failed to add to archive: `{}`", relative)
})?;
let mut file = File::open(file).chain_err(|| {
- human(format!("failed to open for archiving: `{}`", file.display()))
+ format!("failed to open for archiving: `{}`", file.display())
})?;
let metadata = file.metadata().chain_err(|| {
- human(format!("could not learn metadata for: `{}`", relative))
+ format!("could not learn metadata for: `{}`", relative)
})?;
header.set_metadata(&metadata);
use std::path::{Path, PathBuf};
use core::{Package, SourceId, PackageId, EitherManifest};
-use util::{self, paths, human, Config};
+use util::{self, paths, Config};
use util::errors::{CargoResult, CargoResultExt};
use util::important_paths::find_project_manifest_exact;
use util::toml::Layout;
let layout = Layout::from_project_path(path.parent().unwrap());
let root = layout.root.clone();
util::toml::to_manifest(&contents, source_id, layout, config).chain_err(|| {
- human(format!("failed to parse manifest at `{}`",
- root.join("Cargo.toml").display()))
+ format!("failed to parse manifest at `{}`",
+ root.join("Cargo.toml").display())
})
}
})?;
if all_packages.is_empty() {
- Err(human(format!("Could not find Cargo.toml in `{}`", path.display())))
+ Err(format!("Could not find Cargo.toml in `{}`", path.display()).into())
} else {
Ok(all_packages.into_iter().map(|(_, v)| v).collect())
}
return Ok(())
}
Err(e) => {
- return Err(human(e)).chain_err(|| {
- human(format!("failed to read directory `{}`", path.display()))
+ return Err(e).chain_err(|| {
+ format!("failed to read directory `{}`", path.display())
})
}
};
use std::path::Path;
use ops::{self, Packages};
-use util::{self, human, CargoResult, CargoError, ProcessError};
+use util::{self, CargoResult, CargoError, ProcessError};
use util::errors::CargoErrorKind;
use core::Workspace;
0 => ws.current()?,
1 => ws.members()
.find(|pkg| pkg.name() == xs[0])
- .ok_or_else(|| human(
- format!("package `{}` is not a member of the workspace", xs[0])
- ))?,
+ .ok_or_else(||
+ CargoError::from(
+ format!("package `{}` is not a member of the workspace", xs[0]))
+ )?,
_ => unreachable!("cargo run supports single package only"),
}
};
use core::{Package, PackageId, PackageSet, Resolve, Target, Profile};
use core::{TargetKind, Profiles, Dependency, Workspace};
use core::dependency::Kind as DepKind;
-use util::{self, internal, Config, profile, Cfg, CfgExpr, human};
+use util::{self, internal, Config, profile, Cfg, CfgExpr};
use util::errors::{CargoResult, CargoResultExt};
use super::TargetConfig;
has_cfg_and_sysroot = false;
process.exec_with_output()
}).chain_err(|| {
- human(format!("failed to run `rustc` to learn about \
- target-specific information"))
+ format!("failed to run `rustc` to learn about \
+ target-specific information")
})?;
let error = str::from_utf8(&output.stderr).unwrap();
let old_fingerprint_json = paths::read(&loc.with_extension("json"))?;
let old_fingerprint = serde_json::from_str(&old_fingerprint_json)
- .chain_err(|| {internal(format!("failed to deserialize json"))})?;
+ .chain_err(|| internal(format!("failed to deserialize json")))?;
new_fingerprint.compare(&old_fingerprint)
}
use core::{PackageId, Target, Profile};
use util::{Config, DependencyQueue, Fresh, Dirty, Freshness};
-use util::{CargoResult, ProcessBuilder, profile, internal, human};
+use util::{CargoResult, ProcessBuilder, profile, internal};
use {handle_error};
use super::{Context, Kind, Unit};
self.emit_warnings(Some(msg), key, cx)?;
if self.active > 0 {
- error = Some(human("build failed"));
+ error = Some("build failed".into());
handle_error(e, &mut *cx.config.shell());
cx.config.shell().say(
"Build failed, waiting for other \
use std::path::{PathBuf, Path};
use core::Workspace;
-use util::{Config, FileLock, CargoResult, Filesystem, human};
+use util::{Config, FileLock, CargoResult, Filesystem};
pub struct Layout {
root: PathBuf,
// the target triple as a Path and then just use the file stem as the
// component for the directory name.
if let Some(triple) = triple {
- path.push(Path::new(triple).file_stem().ok_or(human("target was empty".to_string()))?);
+ path.push(Path::new(triple).file_stem().ok_or_else(|| "target was empty")?);
}
path.push(dest);
Layout::at(ws.config(), path)
use core::{Package, PackageId, PackageSet, Target, Resolve};
use core::{Profile, Profiles, Workspace};
use core::shell::ColorConfig;
-use util::{self, ProcessBuilder, human, machine_message};
+use util::{self, ProcessBuilder, machine_message};
use util::{Config, internal, profile, join_paths, short_hash};
use util::errors::{CargoResult, CargoResultExt};
use util::Freshness;
for dst in &dsts {
if fs::metadata(dst).is_ok() {
fs::remove_file(dst).chain_err(|| {
- human(format!("Could not remove file: {}.", dst.display()))
+ format!("Could not remove file: {}.", dst.display())
})?;
}
}
Ok(())
}
).chain_err(|| {
- human(format!("Could not compile `{}`.", name))
+ format!("Could not compile `{}`.", name)
})?;
} else {
exec.exec(rustc, &package_id).map_err(|e| e.to_internal()).chain_err(|| {
- human(format!("Could not compile `{}`.", name))
+ format!("Could not compile `{}`.", name)
})?;
}
debug!("linking {} to {}", src.display(), dst.display());
if dst.exists() {
fs::remove_file(&dst).chain_err(|| {
- human(format!("failed to remove: {}", dst.display()))
+ format!("failed to remove: {}", dst.display())
})?;
}
fs::hard_link(src, dst)
fs::copy(src, dst).map(|_| ())
})
.chain_err(|| {
- human(format!("failed to link or copy `{}` to `{}`",
- src.display(), dst.display()))
+ format!("failed to link or copy `{}` to `{}`",
+ src.display(), dst.display())
})?;
}
}
}
state.running(&rustdoc);
- rustdoc.exec().chain_err(|| {
- human(format!("Could not document `{}`.", name))
- })
+ rustdoc.exec().chain_err(|| format!("Could not document `{}`.", name))
}))
}
shell.status("Running", cmd.to_string())
})?;
- if let Err(CargoError(CargoErrorKind::ProcessErrorKind(e), .. )) = cmd.exec() {
- errors.push(e);
- if !options.no_fail_fast {
- return Ok((Test::UnitTest(kind.clone(), test.clone()), errors))
+ let result = cmd.exec();
+
+ match result {
+ Err(CargoError(CargoErrorKind::ProcessErrorKind(e), .. )) => {
+ errors.push(e);
+ if !options.no_fail_fast {
+ return Ok((Test::UnitTest(kind.clone(), test.clone()), errors))
+ }
+ }
+ Err(e) => {
+ // This is an unexpected Cargo error rather than a test failure.
+ return Err(e)
}
+ Ok(()) => {}
}
}
Ok((Test::Multiple, errors))
use core::{Resolve, resolver, Workspace};
use core::resolver::WorkspaceResolve;
-use util::{human, Filesystem};
+use util::Filesystem;
use util::errors::{CargoResult, CargoResultExt};
use util::toml as cargo_toml;
let mut s = String::new();
f.read_to_string(&mut s).chain_err(|| {
- human(format!("failed to read file: {}", f.path().display()))
+ format!("failed to read file: {}", f.path().display())
})?;
(|| -> CargoResult<Option<Resolve>> {
let v: resolver::EncodableResolve = resolve.try_into()?;
Ok(Some(v.into_resolve(ws)?))
})().chain_err(|| {
- human(format!("failed to parse lock file at: {}", f.path().display()))
+ format!("failed to parse lock file at: {}", f.path().display())
})
}
f.write_all(out.as_bytes())?;
Ok(())
}).chain_err(|| {
- human(format!("failed to write {}",
- ws.root().join("Cargo.lock").display()))
+ format!("failed to write {}",
+ ws.root().join("Cargo.lock").display())
})
}
use sources::{RegistrySource};
use util::config;
use util::paths;
-use util::{human, ToUrl};
+use util::ToUrl;
use util::config::{Config, ConfigValue, Location};
-use util::errors::{CargoResult, CargoResultExt};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
use util::important_paths::find_root_manifest_for_wd;
pub struct RegistryConfig {
Ok(())
},
- Err(e) => Err(human(e.to_string())),
+ Err(e) => Err(e.into()),
}
}
let api_host = {
let mut src = RegistrySource::remote(&sid, config);
src.update().chain_err(|| {
- human(format!("failed to update {}", sid))
+ format!("failed to update {}", sid)
})?;
(src.config()?).unwrap().api
};
config.shell().status("Owner", format!("adding {:?} to crate {}",
v, name))?;
registry.add_owners(&name, &v).map_err(|e| {
- human(format!("failed to add owners to crate {}: {}", name, e))
+ CargoError::from(format!("failed to add owners to crate {}: {}", name, e))
})?;
}
config.shell().status("Owner", format!("removing {:?} from crate {}",
v, name))?;
registry.remove_owners(&name, &v).map_err(|e| {
- human(format!("failed to remove owners from crate {}: {}", name, e))
+ CargoError::from(format!("failed to remove owners from crate {}: {}", name, e))
})?;
}
if opts.list {
let owners = registry.list_owners(&name).map_err(|e| {
- human(format!("failed to list owners of crate {}: {}", name, e))
+ CargoError::from(format!("failed to list owners of crate {}: {}", name, e))
})?;
for owner in owners.iter() {
print!("{}", owner.login);
if undo {
config.shell().status("Unyank", format!("{}:{}", name, version))?;
registry.unyank(&name, &version).map_err(|e| {
- human(format!("failed to undo a yank: {}", e))
+ CargoError::from(format!("failed to undo a yank: {}", e))
})?;
} else {
config.shell().status("Yank", format!("{}:{}", name, version))?;
registry.yank(&name, &version).map_err(|e| {
- human(format!("failed to yank: {}", e))
+ CargoError::from(format!("failed to yank: {}", e))
})?;
}
let (mut registry, _) = registry(config, None, index)?;
let (crates, total_crates) = registry.search(query, limit).map_err(|e| {
- human(format!("failed to retrieve search results from the registry: {}", e))
+ CargoError::from(format!("failed to retrieve search results from the registry: {}", e))
})?;
let list_items = crates.iter()
use core::resolver::{self, Resolve, Method};
use sources::PathSource;
use ops;
-use util::{profile, human};
+use util::profile;
use util::errors::{CargoResult, CargoResultExt};
/// Resolve all dependencies for the workspace using the previous
let id = SourceId::for_path(&path)?;
let mut source = PathSource::new_recursive(&path, &id, ws.config());
source.update().chain_err(|| {
- human(format!("failed to update path override `{}` \
+ format!("failed to update path override `{}` \
(defined in `{}`)", path.display(),
- definition.display()))
+ definition.display())
})?;
registry.add_override(Box::new(source));
}
use core::{Source, SourceId};
use sources::ReplacedSource;
-use util::{Config, human, ToUrl};
+use util::{Config, ToUrl};
use util::config::ConfigValue;
-use util::errors::{CargoResult, CargoResultExt};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
pub struct SourceConfigMap<'cfg> {
cfgs: HashMap<String, SourceConfig>,
let mut srcs = srcs.into_iter();
let src = srcs.next().ok_or_else(|| {
- human(format!("no source URL specified for `source.{}`, need \
- either `registry` or `local-registry` defined",
- name))
+ CargoError::from(format!("no source URL specified for `source.{}`, need \
+ either `registry` or `local-registry` defined",
+ name))
})?;
if srcs.next().is_some() {
- return Err(human(format!("more than one source URL specified for \
- `source.{}`", name)))
+ return Err(format!("more than one source URL specified for \
+ `source.{}`", name).into())
}
let mut replace_with = None;
fn url(cfg: &ConfigValue, key: &str) -> CargoResult<Url> {
let (url, path) = cfg.string(key)?;
url.to_url().chain_err(|| {
- human(format!("configuration key `{}` specified an invalid \
- URL (in {})", key, path.display()))
+ format!("configuration key `{}` specified an invalid \
+ URL (in {})", key, path.display())
})
}
use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};
use sources::PathSource;
-use util::{human, Config, Sha256};
-use util::errors::{CargoResult, CargoResultExt, CargoError};
+use util::{Config, Sha256};
+use util::errors::{CargoResult, CargoResultExt};
use util::paths;
pub struct DirectorySource<'cfg> {
fn update(&mut self) -> CargoResult<()> {
self.packages.clear();
let entries = self.root.read_dir().chain_err(|| {
- human(format!("failed to read root of directory source: {}",
- self.root.display()))
+ format!("failed to read root of directory source: {}",
+ self.root.display())
})?;
for entry in entries {
let cksum_file = path.join(".cargo-checksum.json");
let cksum = paths::read(&path.join(cksum_file)).chain_err(|| {
- human(format!("failed to load checksum `.cargo-checksum.json` \
- of {} v{}",
- pkg.package_id().name(),
- pkg.package_id().version()))
+ format!("failed to load checksum `.cargo-checksum.json` \
+ of {} v{}",
+ pkg.package_id().name(),
+ pkg.package_id().version())
})?;
let cksum: Checksum = serde_json::from_str(&cksum)
- .map_err(CargoError::from).chain_err(|| {
- human(format!("failed to decode `.cargo-checksum.json` of \
- {} v{}",
- pkg.package_id().name(),
- pkg.package_id().version()))
+ .chain_err(|| {
+ format!("failed to decode `.cargo-checksum.json` of \
+ {} v{}",
+ pkg.package_id().name(),
+ pkg.package_id().version())
})?;
let mut manifest = pkg.manifest().clone();
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
self.packages.get(id).map(|p| &p.0).cloned().ok_or_else(|| {
- human(format!("failed to find package with id: {}", id))
+ format!("failed to find package with id: {}", id).into()
})
}
}
}
})().chain_err(|| {
- human(format!("failed to calculate checksum of: {}",
- file.display()))
+ format!("failed to calculate checksum of: {}",
+ file.display())
})?;
let actual = h.finish().to_hex();
use url::Url;
use core::GitReference;
-use util::{human, ToUrl, internal, Config, network};
+use util::{ToUrl, internal, Config, network};
use util::errors::{CargoResult, CargoResultExt, CargoError};
#[derive(PartialEq, Clone, Debug)]
let repo = match git2::Repository::open(into) {
Ok(repo) => {
self.fetch_into(&repo, cargo_config).chain_err(|| {
- human(format!("failed to fetch into {}", into.display()))
+ format!("failed to fetch into {}", into.display())
})?;
repo
}
Err(..) => {
self.clone_into(into, cargo_config).chain_err(|| {
- human(format!("failed to clone into: {}", into.display()))
+ format!("failed to clone into: {}", into.display())
})?
}
};
}
fs::create_dir_all(dst)?;
let repo = git2::Repository::init_bare(dst)?;
- fetch(&repo, &url, "refs/heads/*:refs/heads/*", cargo_config)?;
+ fetch(&repo, &url, "refs/heads/*:refs/heads/*", cargo_config)?;
Ok(repo)
}
}
let obj = obj.peel(ObjectType::Commit)?;
Ok(obj.id())
})().chain_err(|| {
- human(format!("failed to find tag `{}`", s))
+ format!("failed to find tag `{}`", s)
})?
}
GitReference::Branch(ref s) => {
(|| {
let b = self.repo.find_branch(s, git2::BranchType::Local)?;
b.get().target().ok_or_else(|| {
- human(format!("branch `{}` did not have a target", s))
+ CargoError::from(format!("branch `{}` did not have a target", s))
})
})().chain_err(|| {
- human(format!("failed to find branch `{}`", s))
+ format!("failed to find branch `{}`", s)
})?
}
GitReference::Rev(ref s) => {
fn clone_repo(source: &Path, into: &Path) -> CargoResult<git2::Repository> {
let dirname = into.parent().unwrap();
- fs::create_dir_all(&dirname).map_err(CargoError::from).chain_err(|| {
- human(format!("Couldn't mkdir {}", dirname.display()))
+ fs::create_dir_all(&dirname).chain_err(|| {
+ format!("Couldn't mkdir {}", dirname.display())
})?;
if fs::metadata(&into).is_ok() {
- fs::remove_dir_all(into).map_err(CargoError::from).chain_err(|| {
- human(format!("Couldn't rmdir {}", into.display()))
+ fs::remove_dir_all(into).chain_err(|| {
+ format!("Couldn't rmdir {}", into.display())
})?;
}
let url = source.to_url()?;
let url = url.to_string();
let repo = git2::Repository::clone(&url, into)
- .map_err(CargoError::from)
.chain_err(|| {
internal(format!("failed to clone {} into {}", source.display(),
into.display()))
let ok_file = self.location.join(".cargo-ok");
let _ = fs::remove_file(&ok_file);
info!("reset {} to {}", self.repo.path().display(), self.revision);
- self.repo.find_object(self.revision.0, None)
- .and_then(|obj| {self.repo.reset(&obj, git2::ResetType::Hard, None)})?;
+ let object = self.repo.find_object(self.revision.0, None)?;
+ self.repo.reset(&object, git2::ResetType::Hard, None)?;
File::create(ok_file)?;
Ok(())
}
for mut child in repo.submodules()?.into_iter() {
update_submodule(repo, &mut child, cargo_config).chain_err(|| {
- human(format!("failed to update submodule `{}`",
- child.name().unwrap_or("")))
+ format!("failed to update submodule `{}`",
+ child.name().unwrap_or(""))
})?;
}
Ok(())
credentials were incorrect");
}
}
- human(msg)
+ msg
})
}
network::with_retry(config, || {
remote.fetch(&[refspec], Some(&mut opts), None)
- .map_err(network::NetworkError::from)
+ .map_err(CargoError::from)
})?;
Ok(())
})
use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};
use ops;
-use util::{self, CargoResult, internal, internal_error, human};
+use util::{self, CargoError, CargoResult, internal};
use util::Config;
pub struct PathSource<'cfg> {
let parse = |p: &String| {
Pattern::new(p).map_err(|e| {
- human(format!("could not parse pattern `{}`: {}", p, e))
+ CargoError::from(format!("could not parse pattern `{}`: {}", p, e))
})
};
warn!("list_files_git {}", pkg.package_id());
let index = repo.index()?;
let root = repo.workdir().ok_or_else(|| {
- internal_error("Can't list files on a bare repository.", "")
+ internal("Can't list files on a bare repository.")
})?;
let pkg_path = pkg.root();
warn!(" found submodule {}", file_path.display());
let rel = util::without_prefix(&file_path, root).unwrap();
let rel = rel.to_str().ok_or_else(|| {
- human(format!("invalid utf-8 filename: {}", rel.display()))
+ CargoError::from(format!("invalid utf-8 filename: {}", rel.display()))
})?;
// Git submodules are currently only named through `/` path
// separators, explicitly not `\` which windows uses. Who knew?
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
if !self.updated {
- return Err(internal_error("BUG: source was not updated", ""));
+ return Err(internal("BUG: source was not updated"));
}
let mut max = FileTime::zero();
use core::{SourceId, Summary, PackageId, Registry};
use sources::registry::{RegistryPackage, RegistryDependency, INDEX_LOCK};
use sources::registry::RegistryData;
-use util::{CargoResult, internal, Filesystem, Config};
-use util::human;
+use util::{CargoError, CargoResult, internal, Filesystem, Config};
pub struct RegistryIndex<'cfg> {
source_id: SourceId,
match load.load(&root, Path::new(&path)) {
Ok(contents) => {
let contents = str::from_utf8(&contents).map_err(|_| {
- human("registry index file was not valid utf-8")
+ CargoError::from("registry index file was not valid utf-8")
})?;
let lines = contents.lines()
.map(|s| s.trim())
use sources::registry::{RegistryData, RegistryConfig};
use util::FileLock;
use util::paths;
-use util::{Config, human, Sha256, Filesystem};
+use util::{Config, Sha256, Filesystem};
use util::errors::{CargoResult, CargoResultExt};
pub struct LocalRegistry<'cfg> {
let mut buf = [0; 64 * 1024];
loop {
let n = crate_file.read(&mut buf).chain_err(|| {
- human(format!("failed to read `{}`", crate_file.path().display()))
+ format!("failed to read `{}`", crate_file.path().display())
})?;
if n == 0 {
break
use sources::registry::{RegistryData, RegistryConfig, INDEX_LOCK};
use util::network;
use util::{FileLock, Filesystem, LazyCell};
-use util::{Config, human, Sha256, ToUrl};
-use util::errors::{CargoResult, CargoResultExt};
+use util::{Config, Sha256, ToUrl};
+use util::errors::{CargoErrorKind, CargoResult, CargoResultExt};
pub struct RemoteRegistry<'cfg> {
index_path: Filesystem,
let url = self.source_id.url().to_string();
let refspec = "refs/heads/master:refs/remotes/origin/master";
git::fetch(&repo, &url, refspec, self.config).chain_err(|| {
- human(format!("failed to fetch `{}`", url))
+ format!("failed to fetch `{}`", url)
})?;
}
let code = handle.response_code()?;
if code != 200 && code != 0 {
let url = handle.effective_url()?.unwrap_or(&url);
- Err(network::NetworkErrorKind::HttpNot200(code, url.to_string()).into())
+ Err(CargoErrorKind::HttpNot200(code, url.to_string()).into())
} else {
Ok(())
}
use core::{Source, Registry, PackageId, Package, Dependency, Summary, SourceId};
-use util::{CargoResult, human};
-use util::errors::CargoResultExt;
+use util::errors::{CargoResult, CargoResultExt};
pub struct ReplacedSource<'cfg> {
to_replace: SourceId,
fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
let dep = dep.clone().map_source(&self.to_replace, &self.replace_with);
let ret = self.inner.query(&dep).chain_err(|| {
- human(format!("failed to query replaced source `{}`",
- self.to_replace))
+ format!("failed to query replaced source `{}`",
+ self.to_replace)
})?;
Ok(ret.into_iter().map(|summary| {
summary.map_source(&self.replace_with, &self.to_replace)
fn update(&mut self) -> CargoResult<()> {
self.inner.update().chain_err(|| {
- human(format!("failed to update replaced source `{}`",
- self.to_replace))
+ format!("failed to update replaced source `{}`",
+ self.to_replace)
})
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
let id = id.with_source_id(&self.replace_with);
let pkg = self.inner.download(&id).chain_err(|| {
- human(format!("failed to download replaced source `{}`",
- self.to_replace))
+ format!("failed to download replaced source `{}`",
+ self.to_replace)
})?;
Ok(pkg.map_source(&self.replace_with, &self.to_replace))
}
use std::iter;
use std::fmt;
-use util::{CargoError, CargoResult, human};
+use util::{CargoError, CargoResult};
#[derive(Clone, PartialEq, Debug)]
pub enum Cfg {
return Some(Ok(Token::String(&self.orig[start+1..end])))
}
}
- return Some(Err(human("unterminated string in cfg".to_string())))
+ return Some(Err("unterminated string in cfg".into()))
}
Some((start, ch)) if is_ident_start(ch) => {
while let Some(&(end, ch)) = self.s.peek() {
return Some(Ok(Token::Ident(&self.orig[start..])))
}
Some((_, ch)) => {
- return Some(Err(human(format!("unexpected character in \
+ return Some(Err(format!("unexpected character in \
cfg `{}`, expected parens, \
a comma, an identifier, or \
- a string", ch))))
+ a string", ch).into()))
}
None => return None
}
use toml;
use core::shell::{Verbosity, ColorConfig};
use core::MultiShell;
-use util::{CargoResult, CargoError, Rustc, internal, human};
-use util::errors::CargoResultExt;
+use util::Rustc;
+use util::errors::{CargoResult, CargoResultExt, CargoError, internal};
use util::{Filesystem, LazyCell};
use util::paths;
pub fn default() -> CargoResult<Config> {
let shell = ::shell(Verbosity::Verbose, ColorConfig::Auto);
let cwd = env::current_dir().chain_err(|| {
- human("couldn't get the current directory of the process")
+ "couldn't get the current directory of the process"
})?;
let homedir = homedir(&cwd).ok_or_else(|| {
- human("Cargo couldn't find your home directory. \
- This probably means that $HOME was not set.")
+ "Cargo couldn't find your home directory. \
+ This probably means that $HOME was not set."
})?;
Ok(Config::new(shell, cwd, homedir))
}
pub fn cargo_exe(&self) -> CargoResult<&Path> {
self.cargo_exe.get_or_try_init(||
env::current_exe().and_then(|path| path.canonicalize())
- .chain_err(|| {
- human("couldn't get the path to cargo executable")
- })
+ .chain_err(|| "couldn't get the path to cargo executable")
).map(AsRef::as_ref)
}
pub fn set_values(&self, values: HashMap<String, ConfigValue>) -> CargoResult<()> {
if self.values.borrow().is_some() {
- return Err(human("Config values already found"));
+ return Err("Config values already found".into());
}
match self.values.fill(values) {
Ok(()) => Ok(()),
- Err(_) => Err(human("Could not fill values")),
+ Err(_) => Err("Could not fill values".into()),
}
}
pub fn expected<T>(&self, ty: &str, key: &str, val: CV) -> CargoResult<T> {
val.expected(ty, key).map_err(|e| {
- human(format!("invalid configuration for key `{}`\n{}", key, e))
+ format!("invalid configuration for key `{}`\n{}", key, e).into()
})
}
walk_tree(&self.cwd, |mut file, path| {
let mut contents = String::new();
file.read_to_string(&mut contents).chain_err(|| {
- human(format!("failed to read configuration file `{}`",
- path.display()))
+ format!("failed to read configuration file `{}`",
+ path.display())
})?;
let toml = cargo_toml::parse(&contents,
&path,
self).chain_err(|| {
- human(format!("could not parse TOML configuration in `{}`",
- path.display()))
+ format!("could not parse TOML configuration in `{}`",
+ path.display())
})?;
let value = CV::from_toml(&path, toml).chain_err(|| {
- human(format!("failed to load TOML configuration from `{}`",
- path.display()))
+ format!("failed to load TOML configuration from `{}`",
+ path.display())
})?;
cfg.merge(value).chain_err(|| {
- human(format!("failed to merge configuration at `{}`",
- path.display()))
+ format!("failed to merge configuration at `{}`", path.display())
})?;
Ok(())
- }).chain_err(|| human("Couldn't load Cargo configuration"))?;
+ }).chain_err(|| "Couldn't load Cargo configuration")?;
match cfg {
Ok(CV::List(val.into_iter().map(|toml| {
match toml {
toml::Value::String(val) => Ok((val, path.to_path_buf())),
- v => Err(human(format!("expected string but found {} \
- in list", v.type_str()))),
+ v => Err(format!("expected string but found {} \
+ in list", v.type_str()).into()),
}
}).collect::<CargoResult<_>>()?, path.to_path_buf()))
}
toml::Value::Table(val) => {
Ok(CV::Table(val.into_iter().map(|(key, value)| {
let value = CV::from_toml(path, value).chain_err(|| {
- human(format!("failed to parse key `{}`", key))
+ format!("failed to parse key `{}`", key)
})?;
Ok((key, value))
}).collect::<CargoResult<_>>()?, path.to_path_buf()))
let path = value.definition_path().to_path_buf();
let entry = entry.get_mut();
entry.merge(value).chain_err(|| {
- human(format!("failed to merge key `{}` between \
- files:\n \
- file 1: {}\n \
- file 2: {}",
- key,
- entry.definition_path().display(),
- path.display()))
+ format!("failed to merge key `{}` between \
+ files:\n \
+ file 1: {}\n \
+ file 2: {}",
+ key,
+ entry.definition_path().display(),
+ path.display())
})?;
}
}
fn expected<T>(&self, wanted: &str, key: &str) -> CargoResult<T> {
- Err(human(format!("expected a {}, but found a {} for `{}` in {}",
- wanted, self.desc(), key,
- self.definition_path().display())))
+ Err(format!("expected a {}, but found a {} for `{}` in {}",
+ wanted, self.desc(), key,
+ self.definition_path().display()).into())
}
fn into_toml(self) -> toml::Value {
// in our history to be sure we pick up that standard location for
// information.
let home = homedir(pwd).ok_or_else(|| {
- human("Cargo couldn't find your home directory. \
- This probably means that $HOME was not set.")
+ CargoError::from("Cargo couldn't find your home directory. \
+ This probably means that $HOME was not set.")
})?;
let config = home.join("config");
if !stash.contains(&config) && fs::metadata(&config).is_ok() {
use std::string;
use core::TargetKind;
-use util::network::{NetworkError, NetworkErrorKind};
use curl;
use git2;
use serde_json;
use term;
use toml;
+use registry;
error_chain! {
types {
}
links {
- Network(NetworkError, NetworkErrorKind);
+ CrateRegistry(registry::Error, registry::ErrorKind);
}
foreign_links {
ParseSemver(semver::ReqParseError);
+ Semver(semver::SemVerError);
Io(io::Error);
SerdeJson(serde_json::Error);
TomlSer(toml::ser::Error);
}
errors {
- Internal(description: String, details: Option<String>){
+ Internal(description: String){
description(&description)
display("{}", &description)
}
description(&test_err.desc)
display("{}", &test_err.desc)
}
+ HttpNot200(code: u32, url: String) {
+ description("failed to get a 200 response")
+ display("failed to get 200 response from `{}`, got {}", url, code)
+ }
}
}
impl CargoError {
pub fn to_internal(self) -> Self {
- CargoError(CargoErrorKind::Internal(self.description().to_string(), None), self.1)
+ CargoError(CargoErrorKind::Internal(self.description().to_string()), self.1)
}
fn is_human(&self) -> bool {
&CargoErrorKind::TomlSer(_) => true,
&CargoErrorKind::TomlDe(_) => true,
&CargoErrorKind::Curl(_) => true,
- &CargoErrorKind::Network(ref net_err) => {
- match net_err {
- &NetworkErrorKind::HttpNot200(_, _) => true,
- &NetworkErrorKind::Curl(_) => true,
- _ => false
- }
- },
- _ => false
+ &CargoErrorKind::HttpNot200(..) => true,
+ &CargoErrorKind::CrateRegistry(_) |
+ &CargoErrorKind::ParseSemver(_) |
+ &CargoErrorKind::Semver(_) |
+ &CargoErrorKind::Io(_) |
+ &CargoErrorKind::SerdeJson(_) |
+ &CargoErrorKind::Term(_) |
+ &CargoErrorKind::ParseInt(_) |
+ &CargoErrorKind::ParseBool(_) |
+ &CargoErrorKind::Parse(_) |
+ &CargoErrorKind::Git(_) |
+ &CargoErrorKind::Internal(_) |
+ &CargoErrorKind::ProcessErrorKind(_) |
+ &CargoErrorKind::CargoTestErrorKind(_) => false
}
}
}
}
}
-pub fn internal_error(error: &str, detail: &str) -> CargoError {
- CargoErrorKind::Internal(error.to_string(), Some(detail.to_string())).into()
-}
-
pub fn internal<S: fmt::Display>(error: S) -> CargoError {
_internal(&error)
}
fn _internal(error: &fmt::Display) -> CargoError {
- CargoErrorKind::Internal(error.to_string(), None).into()
-}
-
-pub fn human<S: fmt::Display>(error: S) -> CargoError {
- _human(&error)
-}
-
-fn _human(error: &fmt::Display) -> CargoError {
- CargoErrorKind::Msg(error.to_string()).into()
+ CargoErrorKind::Internal(error.to_string()).into()
}
#[allow(unused_imports)]
use libc;
-use util::{Config, human};
+use util::Config;
use util::errors::{CargoResult, CargoResultExt};
pub struct FileLock {
Err(e)
}
}).chain_err(|| {
- human(format!("failed to open: {}", path.display()))
+ format!("failed to open: {}", path.display())
})?;
match state {
State::Exclusive => {
Err(e) => {
if e.raw_os_error() != lock_contended_error().raw_os_error() {
- return Err(human(e)).chain_err(|| {
- human(format!("failed to lock file: {}", path.display()))
+ return Err(e).chain_err(|| {
+ format!("failed to lock file: {}", path.display())
})
}
}
config.shell().err().say_status("Blocking", &msg, CYAN, true)?;
return block().chain_err(|| {
- human(format!("failed to lock file: {}", path.display()))
+ format!("failed to lock file: {}", path.display())
});
#[cfg(all(target_os = "linux", not(target_env = "musl")))]
use std::fs;
use std::path::{Path, PathBuf};
-use util::{CargoResult, human};
+use util::errors::CargoResult;
use util::paths;
/// Iteratively search for `file` in `pwd` and its parents, returning
if fs::metadata(&manifest).is_ok() {
Ok(manifest)
} else {
- Err(human(format!("Could not find `{}` in `{}`",
- file, pwd.display())))
+ Err(format!("Could not find `{}` in `{}`",
+ file, pwd.display()).into())
}
}
pub use self::dependency_queue::{DependencyQueue, Fresh, Dirty, Freshness};
pub use self::errors::{CargoResult, CargoResultExt, CargoError, CargoErrorKind, Test, CliResult};
pub use self::errors::{CliError, ProcessError, CargoTestError};
-pub use self::errors::{process_error, internal_error, internal, human};
+pub use self::errors::{process_error, internal};
pub use self::flock::{FileLock, Filesystem};
pub use self::graph::Graph;
pub use self::hex::{to_hex, short_hash, hash_u64};
use util::Config;
-use util::errors::CargoResult;
+use util::errors::{CargoError, CargoErrorKind, CargoResult};
-use curl;
use git2;
-// =============================================================================
-// NetworkError chain
-error_chain!{
- types {
- NetworkError, NetworkErrorKind, NetworkResultExt, NetworkResult;
- }
-
- foreign_links {
- Git(git2::Error);
- Curl(curl::Error);
- }
-
- errors {
- HttpNot200(code: u32, url: String) {
- description("failed to get a 200 response")
- display("failed to get 200 response from `{}`, got {}", url, code)
- }
- }
-}
-
-impl NetworkError {
- pub fn maybe_spurious(&self) -> bool {
- match &self.0 {
- &NetworkErrorKind::Msg(_) => false,
- &NetworkErrorKind::Git(ref git_err) => {
+fn maybe_spurious(err: &CargoError) -> bool {
+ match err.kind() {
+ &CargoErrorKind::Git(ref git_err) => {
match git_err.class() {
git2::ErrorClass::Net |
git2::ErrorClass::Os => true,
_ => false
}
}
- &NetworkErrorKind::Curl(ref curl_err) => {
+ &CargoErrorKind::Curl(ref curl_err) => {
curl_err.is_couldnt_connect() ||
curl_err.is_couldnt_resolve_proxy() ||
curl_err.is_couldnt_resolve_host() ||
curl_err.is_operation_timedout() ||
curl_err.is_recv_error()
}
- &NetworkErrorKind::HttpNot200(code, ref _url) => {
+ &CargoErrorKind::HttpNot200(code, ref _url) => {
500 <= code && code < 600
}
- }
+ _ => false
}
}
/// cargo_result = network.with_retry(&config, || something.download());
/// ```
pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
- where F: FnMut() -> NetworkResult<T>
+ where F: FnMut() -> CargoResult<T>
{
let mut remaining = config.net_retry()?;
loop {
match callback() {
Ok(ret) => return Ok(ret),
- Err(ref e) if e.maybe_spurious() && remaining > 0 => {
+ Err(ref e) if maybe_spurious(e) && remaining > 0 => {
let msg = format!("spurious network error ({} tries \
remaining): {}", remaining, e);
config.shell().warn(msg)?;
#[test]
fn with_retry_repeats_the_call_then_works() {
//Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
- let error1 = NetworkErrorKind::HttpNot200(501, "Uri".to_string()).into();
- let error2 = NetworkErrorKind::HttpNot200(502, "Uri".to_string()).into();
- let mut results: Vec<NetworkResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
+ let error1 = CargoErrorKind::HttpNot200(501, "Uri".to_string()).into();
+ let error2 = CargoErrorKind::HttpNot200(502, "Uri".to_string()).into();
+ let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
let config = Config::default().unwrap();
let result = with_retry(&config, || results.pop().unwrap());
assert_eq!(result.unwrap(), ())
use std::io::prelude::*;
use std::path::{Path, PathBuf, Component};
-use util::{human, internal, CargoResult};
+use util::{internal, CargoResult};
use util::errors::CargoResultExt;
pub fn join_paths<T: AsRef<OsStr>>(paths: &[T], env: &str) -> CargoResult<OsString> {
env::join_paths(paths.iter()).or_else(|e| {
let paths = paths.iter().map(Path::new).collect::<Vec<_>>();
Err(internal(format!("failed to join path array: {:?}", paths))).chain_err(|| {
- human(format!("failed to join search paths together: {}\n\
+ format!("failed to join search paths together: {}\n\
Does ${} have an unterminated quote character?",
- e, env))
+ e, env)
})
})
}
let mut f = File::open(path)?;
f.read_to_string(&mut ret)?;
Ok(ret)
- })().map_err(human).chain_err(|| {
- human(format!("failed to read `{}`", path.display()))
+ })().chain_err(|| {
+ format!("failed to read `{}`", path.display())
})
}
let mut f = File::open(path)?;
f.read_to_end(&mut ret)?;
Ok(ret)
- })().map_err(human).chain_err(|| {
- human(format!("failed to read `{}`", path.display()))
+ })().chain_err(|| {
+ format!("failed to read `{}`", path.display())
})
}
let mut f = File::create(path)?;
f.write_all(contents)?;
Ok(())
- })().map_err(human).chain_err(|| {
- human(format!("failed to write `{}`", path.display()))
+ })().chain_err(|| {
+ format!("failed to write `{}`", path.display())
})
}
pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> {
match path.as_os_str().to_str() {
Some(s) => Ok(s.as_bytes()),
- None => Err(human(format!("invalid non-unicode path: {}",
- path.display())))
+ None => Err(format!("invalid non-unicode path: {}",
+ path.display()).into())
}
}
use std::str;
match str::from_utf8(bytes) {
Ok(s) => Ok(PathBuf::from(s)),
- Err(..) => Err(human("invalid non-unicode path")),
+ Err(..) => Err("invalid non-unicode path".into()),
}
}
use url::Url;
-use util::{human, CargoResult};
+use util::CargoResult;
pub trait ToUrl {
fn to_url(self) -> CargoResult<Url>;
impl<'a> ToUrl for &'a str {
fn to_url(self) -> CargoResult<Url> {
Url::parse(self).map_err(|s| {
- human(format!("invalid url `{}`: {}", self, s))
+ format!("invalid url `{}`: {}", self, s).into()
})
}
}
impl<'a> ToUrl for &'a Path {
fn to_url(self) -> CargoResult<Url> {
Url::from_file_path(self).map_err(|()| {
- human(format!("invalid path url `{}`", self.display()))
+ format!("invalid path url `{}`", self.display()).into()
})
}
}
use core::manifest::{LibKind, Profile, ManifestMetadata};
use ops::is_bad_artifact_name;
use sources::CRATES_IO;
-use util::{self, CargoResult, human, ToUrl, ToSemver, Config};
-use util::errors::CargoResultExt;
+use util::{self, ToUrl, ToSemver, Config};
+use util::errors::{CargoError, CargoResult, CargoResultExt};
/// Representation of the projects file layout.
///
}
Err(first_error).chain_err(|| {
- human("could not parse input as TOML")
+ "could not parse input as TOML"
})
}
let project = me.project.as_ref().or_else(|| me.package.as_ref());
let project = project.ok_or_else(|| {
- human("no `package` or `project` section found.")
+ CargoError::from("no `package` or `project` section found.")
})?;
if project.name.trim().is_empty() {
let mut replace = Vec::new();
for (spec, replacement) in self.replace.iter().flat_map(|x| x) {
let mut spec = PackageIdSpec::parse(spec).chain_err(|| {
- human(format!("replacements must specify a valid semver \
- version to replace, but `{}` does not",
- spec))
+ format!("replacements must specify a valid semver \
+ version to replace, but `{}` does not",
+ spec)
})?;
if spec.url().is_none() {
spec.set_url(CRATES_IO.parse().unwrap());
let dep = replacement.to_dependency(spec.name(), cx, None)?;
let dep = {
- let version = spec.version().ok_or_else(||
- {
- human(format!("replacements must specify a version \
- to replace, but `{}` does not",
- spec))
+ let version = spec.version().ok_or_else(|| {
+ CargoError::from(format!("replacements must specify a version \
+ to replace, but `{}` does not",
+ spec))
})?;
let req = VersionReq::exact(version);
dep.clone_inner().set_version_req(req)
match self.name {
Some(ref name) => {
if name.trim().is_empty() {
- Err(human("library target names cannot be empty.".to_string()))
+ Err("library target names cannot be empty.".into())
} else if name.contains('-') {
- Err(human(format!("library target names cannot contain hyphens: {}",
- name)))
+ Err(format!("library target names cannot contain hyphens: {}",
+ name).into())
} else {
Ok(())
}
match self.name {
Some(ref name) => {
if name.trim().is_empty() {
- Err(human("binary target names cannot be empty.".to_string()))
+ Err("binary target names cannot be empty.".into())
} else {
Ok(())
}
},
- None => Err(human("binary target bin.name is required".to_string()))
+ None => Err("binary target bin.name is required".into())
}
}
match self.name {
Some(ref name) => {
if name.trim().is_empty() {
- Err(human("example target names cannot be empty".to_string()))
+ Err("example target names cannot be empty".into())
} else {
Ok(())
}
},
- None => Err(human("example target example.name is required".to_string()))
+ None => Err("example target example.name is required".into())
}
}
match self.name {
Some(ref name) => {
if name.trim().is_empty() {
- Err(human("test target names cannot be empty".to_string()))
+ Err("test target names cannot be empty".into())
} else {
Ok(())
}
},
- None => Err(human("test target test.name is required".to_string()))
+ None => Err("test target test.name is required".into())
}
}
match self.name {
Some(ref name) => {
if name.trim().is_empty() {
- Err(human("bench target names cannot be empty".to_string()))
+ Err("bench target names cannot be empty".into())
} else {
Ok(())
}
},
- None => Err(human("bench target bench.name is required".to_string()))
+ None => Err("bench target bench.name is required".into())
}
}
// A plugin requires exporting plugin_registrar so a crate cannot be
// both at once.
if self.plugin == Some(true) && self.proc_macro() == Some(true) {
- Err(human("lib.plugin and lib.proc-macro cannot both be true".to_string()))
+ Err("lib.plugin and lib.proc-macro cannot both be true".into())
} else {
Ok(())
}
let stderr = str::from_utf8(&output.stderr).unwrap();
assert!(stderr.contains("☃"), "no snowman\n{}", stderr);
- assert!(stderr.contains("unknown start of token"), "no message\n{}", stderr);
- }else{
- assert!(false, "an error kind other than ProcessErrorKind was encountered\n");
+ assert!(stderr.contains("unknown start of token"), "no message{}", stderr);
+ } else {
+ assert!(false, "an error kind other than ProcessErrorKind was encountered");
}
}